====== Tikiwiki2Dokuwiki ====== I've done this script to migrate our tikiwiki. It is provided as is without guaranty or support of any kind. Feel free to improve or modify it as you need. It export the pages from the Tikiwiki database and convert some of the syntax. There is no history export. After it's done, copy the files in /pages directory to your wiki. --- //[[jeanmichel.lacroix@gmail.com|Jean-Michel LACROIX]] 2008/04/11 12:02// Aditional notes: You need to run it from php. (~$php ./Tiki2Dokuwiki)\\ You need to copy /var/www/dokuwiki/inc/utf8.php to the directory you are running the script from.\\ You need to have created a "pages" directory in the directory you are running the script from.\\ You need to add the root mysql password.\\ You may need to change the mysql database from tikiwiki to whatever. Log in to mysql and execute "show databases;" to see what it may be called.

/**
 * Clean a raw page name into a valid DokuWiki page id.
 *
 * Copied from DokuWiki core. It lowercases the name, maps namespace and
 * slash separators, optionally romanizes/deaccents/strips to ASCII, removes
 * special characters, and collapses repeated separators.
 *
 * NOTE(review): this relies on the utf8_* helpers from inc/utf8.php and on
 * the global $conf array — neither is initialised in this standalone script,
 * so $conf['sepchar'] / $conf['useslash'] / $conf['deaccent'] read as null
 * here; confirm the intended configuration before relying on the output.
 *
 * @param string  $raw_id The pageid to clean
 * @param boolean $ascii  Force ASCII
 */
function cleanID($raw_id,$ascii=false){
    global $conf;
    static $sepcharpat = null;

    // Module-level memo of raw id -> cleaned id (written below, read by callers).
    global $cache_cleanid;
    $cache = & $cache_cleanid;

    $sepchar = $conf['sepchar'];
    if($sepcharpat == null) // build string only once to save clock cycles
        $sepcharpat = '#\\'.$sepchar.'+#';

    $id = trim($raw_id);
    $id = utf8_strtolower($id);

    //alternative namespace seperator
    $id = strtr($id,';',':');
    // Slashes become either namespace separators or the configured word separator.
    if($conf['useslash']){
        $id = strtr($id,'/',':');
    }else{
        $id = strtr($id,'/',$sepchar);
    }

    if($conf['deaccent'] == 2 || $ascii) $id = utf8_romanize($id);
    if($conf['deaccent'] || $ascii) $id = utf8_deaccent($id,-1);

    //remove specials
    $id = utf8_stripspecials($id,$sepchar,'\*');

    if($ascii) $id = utf8_strip($id);

    //clean up: collapse repeated separators/colons and trim leading/trailing ones
    $id = preg_replace($sepcharpat,$sepchar,$id);
    $id = preg_replace('#:+#',':',$id);
    $id = trim($id,':._-');
    $id = preg_replace('#:[:\._\-]+#',':',$id);

    $cache[$raw_id] = $id;
    return($id);
}

print "
";


// Export every page: name + raw wiki markup. Uncomment the trailing clauses
// to limit the export while testing.
$query = "SELECT pageName, data FROM tiki_pages"; //  LIMIT 1,10  WHERE page_id = '1'

// Connect to the local TikiWiki MySQL database.
// NOTE(review): the mysql_* extension is deprecated (removed in PHP 7), the
// root password is hard-coded empty, and the database name is assumed to be
// "tikiwiki" — adjust all three for your installation (see notes above).
$link = mysql_connect("localhost", "root", "") or die("Could not connect : " . mysql_error()); 
mysql_select_db("tikiwiki") or die("Could not select database"); 
$result = mysql_query($query) or die("Query failed : " . mysql_error()); 
// Map TikiWiki smileys to their closest DokuWiki equivalent. Not every emote
// exists in DokuWiki, so some map to plain text ("LOL") or to nothing at all.
// Hoisted out of the loop so the arrays are built once, not once per page.
$tikiwiki_emote = array("(:biggrin:)", "(:confused:)", "(:cool:)", "(:cry:)", "(:eek:)", "(:evil:)", "(:exclaim:)", "(:frown:)", "(:idea:)", "(:lol:)", "(:mad:)", "(:mrgreen:)", "(:neutral:)", "(:question:)", "(:razz:)", "(:redface:)", "(:rolleyes:)", "(:sad:)", "(:smile:)", "(:surprised:)", "(:twisted:)", "(:wink:)", "(:arrow:)");
$dokuwiki_emote = array(":-D"        , ":-?"         , "8-)",      ":-("    , "8-O"    , "8-)"     , ":!:"        , ":-("      , ":!:"     , "LOL"    , "8-O"    , "8-)"        , ":-|"        , ":?:"         , "^_^"     , ""           , ""            , ":-("    , ":-)"      , ":-O"          , "^_^"        , ";-)"     , "->");

// Walk every exported page, convert its markup in place, and write one
// DokuWiki page file per TikiWiki page under ./pages/.
$x = 0;
while ($line = mysql_fetch_array($result)) { 
    $x++;
    // Progress output: original page name and the DokuWiki id it becomes.
    print $x . " " . $line['pageName'] . "  --->  " .cleanID($line['pageName']) ."\n";

    // Conversion from Tikiwiki 2 Dokuwiki syntax

    // Normalize CRLF to LF so the ^/$ multiline anchors below behave.
    $line['data'] = str_replace("\r", "", $line['data']);

    $line['data'] = preg_replace('/^\* */m', "  * ", $line['data']); // List
    $line['data'] = preg_replace('/^- */m', "  * ", $line['data']); // List
    $line['data'] = preg_replace('/^# */m', "  - ", $line['data']); // Numbered list
    $line['data'] = preg_replace('/\[(.*?)\]/', "[[\\1]]", $line['data']); // External links
    $line['data'] = preg_replace('/\(\((.*?)\)\)/', "[[\\1]]", $line['data']); // Internal links

    // Headings: Tiki counts !'s up from h1, Doku counts ='s down from h1,
    // so match the most-bang form first to avoid partial matches.
    $line['data'] = preg_replace("/^!!!(.+?)$/m", "==== \\1 ====", $line['data']); // Heading 3
    $line['data'] = preg_replace("/^!!(.+?)$/m", "===== \\1 =====", $line['data']); // Heading 2
    $line['data'] = preg_replace('/^!(.+?)$/m', "====== \\1 ======", $line['data']); // Heading 1
    $line['data'] = preg_replace('/^-=(.+?)=-$/m', "====== \\1 ======", $line['data']); // Heading 1
    $line['data'] = preg_replace('/( *: *)(?=====)/', " ", $line['data']); // Remove ":" from heading

    // Table conversion. Links just created above contain "|", which would be
    // mistaken for a cell separator, so shelter them behind a __TEMP__ marker
    // for the duration of the table rewrites.
    $line['data'] = preg_replace("/\[\[([^]]*)\|([^]]*)\]\]/m", "[[\\1__TEMP__\\2]]", $line['data']); // [[ | ]] -> [[ TEMP ]] to avoid conflict
    $line['data'] = preg_replace("/^\|\|$/m", "", $line['data']); // Remove Tiki's "||" table delimiter lines
    $line['data'] = preg_replace("/^\|\|/m", "", $line['data']);
    $line['data'] = preg_replace("/\|\|$/m", "", $line['data']);
    // Wrap each remaining row in the |...| form DokuWiki expects.
    $line['data'] = preg_replace("/^([^|\n]+?\|[^\n]+?[^|\n])$/m", "|\\1|", $line['data']); // blabla | blabla
    $line['data'] = preg_replace("/^(\|)([^|\n]*?)$/m", "|\\2|", $line['data']); // | blabla
    $line['data'] = preg_replace("/^([^|\n]*?)(\|)$/m", "|\\1|", $line['data']); // blabla |
    $line['data'] = preg_replace("/\[\[([^]]*)__TEMP__([^]]*)\]\]/m", "[[\\1|\\2]]", $line['data']); // [[ TEMP ]] -> [[ | ]]
    // end of table conversion

    $line['data'] = preg_replace("/(\^)([^\^]*?)(\^)/", "\\2", $line['data']); // strip ^...^ (Tiki "box") markers
    $line['data'] = preg_replace("!(~np~|~/np~)!", "", $line['data']); // remove ~np~ no-parse markers

    $line['data'] = preg_replace("/~~#[0-9A-F]+:([^~]*)~~/i", "\\1", $line['data']); // Remove color
    $line['data'] = preg_replace("/__(.*?)__/", "**\\1**", $line['data']); // Bold

    // Disabled: force a DokuWiki line break at every plain newline.
    //$line['data'] = preg_replace("/([^\n=])\n( *[^\n])/", "\\1\\\\\\\\\n\\2", $line['data']);

    $line['data'] = str_replace($tikiwiki_emote, $dokuwiki_emote, $line['data']);

    // NOTE(review): two different page names can clean to the same id, in
    // which case the later page silently overwrites the earlier file ("w+"
    // truncates) — a likely cause of fewer output files than input pages.
    $filename = dirname(__FILE__) . "/pages/" . cleanID($line['pageName']) . ".txt"; 
    $handle = fopen($filename, "w+") or die("Could not open " . $filename . " for writing"); 
    fwrite($handle, $line['data']);
    fclose ($handle);
}
// Release the result set and close the database connection.
mysql_free_result($result); 
mysql_close($link); 


==== Discussion ====

This PHP script does the job well, but it only does about 50% of the work. I have 67 wiki pages but it only outputs 60 pages (to ''%%%%/pages''). The main issue is that characters outside of ASCII are not converted (I have ''/var/www/dokuwiki/inc/utf8.php'' included in %%%%), and some other syntax is not converted either, especially HTML tags ( ''%%{DIV(class="someclass")}some content{DIV}%%'' ).\\
But this script is a good starting point, I think; it will probably work well after some tweaking. Thanks for sharing :-) --- [[user>treegb]] 2016-10-07.